header%20ipynb.png

Package version :
  1. Tensorflow 2.4.1
  2. Keras 2.4.3
  3. Matplotlib 3.5.0
  4. Scikit-learn 0.23.2
In [1]:
## Just disables the warning, doesn't take advantage of AVX/FMA to run faster
# import os
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
In [2]:
from sklearn import datasets
from sklearn.model_selection import train_test_split

iris = datasets.load_iris()

X = iris.data
y = iris.target
In [3]:
X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size=.10)
X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=.15)
print('X_train', X_train.shape)
print('X_val', X_val.shape)
print('X_test', X_test.shape)
X_train (114, 4)
X_val (21, 4)
X_test (15, 4)
In [4]:
from sklearn.neural_network import MLPClassifier

mlp = MLPClassifier(hidden_layer_sizes=(64, ), activation='relu',max_iter=1000, epsilon=1e-08)
In [5]:
from sklearn.metrics import accuracy_score

mlp.fit(X_train, Y_train)
prediksi_val = mlp.predict(X_val)
acc_val = accuracy_score(Y_val, prediksi_val)
print('Akurasi Validasi Training ANN:', acc_val)
Akurasi Validasi Training ANN: 0.9523809523809523
In [6]:
prediksi_test = mlp.predict(X_test)
acc_test = accuracy_score(Y_test, prediksi_test)
print('Akurasi Testing ANN:', acc_test)
Akurasi Testing ANN: 1.0
In [7]:
from sklearn.metrics import accuracy_score, plot_confusion_matrix

prediksi = mlp.predict(X_test)
plot_confusion_matrix(mlp, X_test, Y_test)
accuracy = accuracy_score(Y_test, prediksi)
print('Akurasi Testing ANN:', accuracy)
C:\ProgramData\Miniconda3\lib\site-packages\sklearn\utils\deprecation.py:87: FutureWarning: Function plot_confusion_matrix is deprecated; Function `plot_confusion_matrix` is deprecated in 1.0 and will be removed in 1.2. Use one of the class methods: ConfusionMatrixDisplay.from_predictions or ConfusionMatrixDisplay.from_estimator.
  warnings.warn(msg, category=FutureWarning)
Akurasi Testing ANN: 1.0
In [8]:
# from keras.utils import to_categorical

from tensorflow.keras.utils import to_categorical


Y_train = to_categorical(Y_train,3)
Y_val = to_categorical(Y_val,3)
Y_test = to_categorical(Y_test,3)
In [9]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Flatten, Dense

model = Sequential()
model.add(Flatten())
model.add(Dense(64,activation='relu'))
model.add(Dense(3,activation='softmax'))

model.compile(optimizer='adam',loss='categorical_crossentropy', metrics=['acc'])
In [10]:
model.fit(X_train,Y_train,epochs=100,batch_size=10,validation_data=(X_val,Y_val))
Epoch 1/100
12/12 [==============================] - 2s 50ms/step - loss: 1.0494 - acc: 0.3684 - val_loss: 0.9466 - val_acc: 0.6190
Epoch 2/100
12/12 [==============================] - 0s 9ms/step - loss: 0.9070 - acc: 0.6930 - val_loss: 0.9150 - val_acc: 0.5714
Epoch 3/100
12/12 [==============================] - 0s 8ms/step - loss: 0.8045 - acc: 0.6842 - val_loss: 0.8252 - val_acc: 0.5714
Epoch 4/100
12/12 [==============================] - 0s 8ms/step - loss: 0.7260 - acc: 0.7018 - val_loss: 0.7388 - val_acc: 0.7619
Epoch 5/100
12/12 [==============================] - 0s 8ms/step - loss: 0.6487 - acc: 0.9211 - val_loss: 0.6922 - val_acc: 0.6190
Epoch 6/100
12/12 [==============================] - 0s 8ms/step - loss: 0.6005 - acc: 0.7281 - val_loss: 0.6570 - val_acc: 0.5714
Epoch 7/100
12/12 [==============================] - 0s 8ms/step - loss: 0.5531 - acc: 0.8070 - val_loss: 0.6275 - val_acc: 0.6190
Epoch 8/100
12/12 [==============================] - 0s 8ms/step - loss: 0.5218 - acc: 0.8947 - val_loss: 0.5950 - val_acc: 0.8571
Epoch 9/100
12/12 [==============================] - 0s 8ms/step - loss: 0.5037 - acc: 0.8333 - val_loss: 0.5797 - val_acc: 0.6667
Epoch 10/100
12/12 [==============================] - 0s 6ms/step - loss: 0.4853 - acc: 0.9386 - val_loss: 0.5512 - val_acc: 0.9048
Epoch 11/100
12/12 [==============================] - 0s 5ms/step - loss: 0.4576 - acc: 0.9211 - val_loss: 0.5555 - val_acc: 0.6190
Epoch 12/100
12/12 [==============================] - 0s 5ms/step - loss: 0.4463 - acc: 0.8333 - val_loss: 0.5317 - val_acc: 0.8571
Epoch 13/100
12/12 [==============================] - 0s 5ms/step - loss: 0.4256 - acc: 0.9386 - val_loss: 0.5145 - val_acc: 0.9048
Epoch 14/100
12/12 [==============================] - 0s 5ms/step - loss: 0.4105 - acc: 0.9737 - val_loss: 0.5001 - val_acc: 0.9048
Epoch 15/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3968 - acc: 0.9649 - val_loss: 0.4960 - val_acc: 0.9048
Epoch 16/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3852 - acc: 0.9737 - val_loss: 0.4800 - val_acc: 0.9048
Epoch 17/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3755 - acc: 0.9561 - val_loss: 0.4716 - val_acc: 0.9048
Epoch 18/100
12/12 [==============================] - 0s 5ms/step - loss: 0.3635 - acc: 0.9649 - val_loss: 0.4585 - val_acc: 0.9048
Epoch 19/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3552 - acc: 0.9649 - val_loss: 0.4560 - val_acc: 0.9048
Epoch 20/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3426 - acc: 0.9737 - val_loss: 0.4431 - val_acc: 0.9048
Epoch 21/100
12/12 [==============================] - 0s 5ms/step - loss: 0.3340 - acc: 0.9737 - val_loss: 0.4333 - val_acc: 0.9048
Epoch 22/100
12/12 [==============================] - 0s 6ms/step - loss: 0.3260 - acc: 0.9649 - val_loss: 0.4320 - val_acc: 0.9048
Epoch 23/100
12/12 [==============================] - 0s 5ms/step - loss: 0.3165 - acc: 0.9649 - val_loss: 0.4124 - val_acc: 0.9048
Epoch 24/100
12/12 [==============================] - ETA: 0s - loss: 0.2860 - acc: 1.000 - 0s 6ms/step - loss: 0.3076 - acc: 0.9649 - val_loss: 0.4237 - val_acc: 0.9048
Epoch 25/100
12/12 [==============================] - 0s 7ms/step - loss: 0.3012 - acc: 0.9649 - val_loss: 0.4085 - val_acc: 0.9048
Epoch 26/100
12/12 [==============================] - 0s 6ms/step - loss: 0.2906 - acc: 0.9737 - val_loss: 0.3905 - val_acc: 0.9048
Epoch 27/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2834 - acc: 0.9737 - val_loss: 0.3926 - val_acc: 0.9524
Epoch 28/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2754 - acc: 0.9737 - val_loss: 0.3783 - val_acc: 0.9524
Epoch 29/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2690 - acc: 0.9737 - val_loss: 0.3841 - val_acc: 0.9048
Epoch 30/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2624 - acc: 0.9737 - val_loss: 0.3722 - val_acc: 0.9524
Epoch 31/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2557 - acc: 0.9649 - val_loss: 0.3560 - val_acc: 0.9048
Epoch 32/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2475 - acc: 0.9737 - val_loss: 0.3699 - val_acc: 0.9048
Epoch 33/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2452 - acc: 0.9737 - val_loss: 0.3560 - val_acc: 0.9524
Epoch 34/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2431 - acc: 0.9737 - val_loss: 0.3585 - val_acc: 0.9048
Epoch 35/100
12/12 [==============================] - 0s 6ms/step - loss: 0.2297 - acc: 0.9737 - val_loss: 0.3367 - val_acc: 0.9048
Epoch 36/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2252 - acc: 0.9737 - val_loss: 0.3309 - val_acc: 0.9048
Epoch 37/100
12/12 [==============================] - 0s 6ms/step - loss: 0.2195 - acc: 0.9737 - val_loss: 0.3309 - val_acc: 0.9524
Epoch 38/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2172 - acc: 0.9737 - val_loss: 0.3183 - val_acc: 0.9048
Epoch 39/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2105 - acc: 0.9737 - val_loss: 0.3430 - val_acc: 0.9048
Epoch 40/100
12/12 [==============================] - 0s 5ms/step - loss: 0.2057 - acc: 0.9737 - val_loss: 0.3171 - val_acc: 0.9524
Epoch 41/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1981 - acc: 0.9737 - val_loss: 0.3048 - val_acc: 0.9048
Epoch 42/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1937 - acc: 0.9737 - val_loss: 0.3111 - val_acc: 0.9524
Epoch 43/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1900 - acc: 0.9737 - val_loss: 0.3060 - val_acc: 0.9524
Epoch 44/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1847 - acc: 0.9737 - val_loss: 0.3021 - val_acc: 0.9524
Epoch 45/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1824 - acc: 0.9737 - val_loss: 0.2960 - val_acc: 0.9524
Epoch 46/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1807 - acc: 0.9737 - val_loss: 0.3024 - val_acc: 0.9524
Epoch 47/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1740 - acc: 0.9737 - val_loss: 0.2887 - val_acc: 0.9524
Epoch 48/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1690 - acc: 0.9737 - val_loss: 0.2975 - val_acc: 0.9524
Epoch 49/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1765 - acc: 0.9737 - val_loss: 0.2894 - val_acc: 0.9524
Epoch 50/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1670 - acc: 0.9737 - val_loss: 0.2677 - val_acc: 0.9048
Epoch 51/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1603 - acc: 0.9825 - val_loss: 0.2946 - val_acc: 0.9524
Epoch 52/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1570 - acc: 0.9737 - val_loss: 0.2673 - val_acc: 0.9048
Epoch 53/100
12/12 [==============================] - 0s 6ms/step - loss: 0.1565 - acc: 0.9737 - val_loss: 0.2663 - val_acc: 0.9048
Epoch 54/100
12/12 [==============================] - 0s 6ms/step - loss: 0.1500 - acc: 0.9737 - val_loss: 0.2700 - val_acc: 0.9524
Epoch 55/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1480 - acc: 0.9737 - val_loss: 0.2595 - val_acc: 0.9048
Epoch 56/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1501 - acc: 0.9737 - val_loss: 0.2714 - val_acc: 0.9524
Epoch 57/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1446 - acc: 0.9825 - val_loss: 0.2542 - val_acc: 0.9048
Epoch 58/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1421 - acc: 0.9737 - val_loss: 0.2636 - val_acc: 0.9524
Epoch 59/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1363 - acc: 0.9737 - val_loss: 0.2558 - val_acc: 0.9524
Epoch 60/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1352 - acc: 0.9737 - val_loss: 0.2442 - val_acc: 0.9048
Epoch 61/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1322 - acc: 0.9737 - val_loss: 0.2551 - val_acc: 0.9524
Epoch 62/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1303 - acc: 0.9737 - val_loss: 0.2535 - val_acc: 0.9524
Epoch 63/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1266 - acc: 0.9737 - val_loss: 0.2396 - val_acc: 0.9048
Epoch 64/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1251 - acc: 0.9737 - val_loss: 0.2437 - val_acc: 0.9524
Epoch 65/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1228 - acc: 0.9737 - val_loss: 0.2535 - val_acc: 0.9524
Epoch 66/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1213 - acc: 0.9737 - val_loss: 0.2389 - val_acc: 0.9048
Epoch 67/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1209 - acc: 0.9737 - val_loss: 0.2469 - val_acc: 0.9524
Epoch 68/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1171 - acc: 0.9737 - val_loss: 0.2385 - val_acc: 0.9524
Epoch 69/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1160 - acc: 0.9737 - val_loss: 0.2385 - val_acc: 0.9524
Epoch 70/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1145 - acc: 0.9737 - val_loss: 0.2329 - val_acc: 0.9524
Epoch 71/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1111 - acc: 0.9737 - val_loss: 0.2267 - val_acc: 0.9048
Epoch 72/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1110 - acc: 0.9737 - val_loss: 0.2343 - val_acc: 0.9524
Epoch 73/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1090 - acc: 0.9737 - val_loss: 0.2340 - val_acc: 0.9524
Epoch 74/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1083 - acc: 0.9737 - val_loss: 0.2219 - val_acc: 0.9048
Epoch 75/100
12/12 [==============================] - 0s 6ms/step - loss: 0.1082 - acc: 0.9737 - val_loss: 0.2335 - val_acc: 0.9524
Epoch 76/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1029 - acc: 0.9737 - val_loss: 0.2189 - val_acc: 0.9048
Epoch 77/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1043 - acc: 0.9825 - val_loss: 0.2260 - val_acc: 0.9524
Epoch 78/100
12/12 [==============================] - 0s 5ms/step - loss: 0.1009 - acc: 0.9737 - val_loss: 0.2222 - val_acc: 0.9524
Epoch 79/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0997 - acc: 0.9737 - val_loss: 0.2189 - val_acc: 0.9048
Epoch 80/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0994 - acc: 0.9737 - val_loss: 0.2216 - val_acc: 0.9524
Epoch 81/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0974 - acc: 0.9737 - val_loss: 0.2144 - val_acc: 0.9048
Epoch 82/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0963 - acc: 0.9737 - val_loss: 0.2173 - val_acc: 0.9524
Epoch 83/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0954 - acc: 0.9737 - val_loss: 0.2157 - val_acc: 0.9524
Epoch 84/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0980 - acc: 0.9737 - val_loss: 0.2252 - val_acc: 0.9524
Epoch 85/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0957 - acc: 0.9825 - val_loss: 0.2069 - val_acc: 0.9048
Epoch 86/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0901 - acc: 0.9737 - val_loss: 0.2224 - val_acc: 0.9524
Epoch 87/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0906 - acc: 0.9737 - val_loss: 0.2121 - val_acc: 0.9524
Epoch 88/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0898 - acc: 0.9825 - val_loss: 0.2081 - val_acc: 0.9048
Epoch 89/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0894 - acc: 0.9737 - val_loss: 0.2169 - val_acc: 0.9524
Epoch 90/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0882 - acc: 0.9737 - val_loss: 0.2053 - val_acc: 0.9048
Epoch 91/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0883 - acc: 0.9825 - val_loss: 0.2083 - val_acc: 0.9524
Epoch 92/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0879 - acc: 0.9737 - val_loss: 0.2135 - val_acc: 0.9524
Epoch 93/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0848 - acc: 0.9737 - val_loss: 0.1973 - val_acc: 0.9048
Epoch 94/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0835 - acc: 0.9737 - val_loss: 0.2109 - val_acc: 0.9524
Epoch 95/100
12/12 [==============================] - 0s 6ms/step - loss: 0.0852 - acc: 0.9737 - val_loss: 0.2104 - val_acc: 0.9524
Epoch 96/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0877 - acc: 0.9737 - val_loss: 0.2028 - val_acc: 0.9048
Epoch 97/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0825 - acc: 0.9825 - val_loss: 0.1954 - val_acc: 0.9048
Epoch 98/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0793 - acc: 0.9737 - val_loss: 0.2177 - val_acc: 0.9524
Epoch 99/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0809 - acc: 0.9737 - val_loss: 0.2011 - val_acc: 0.9524
Epoch 100/100
12/12 [==============================] - 0s 5ms/step - loss: 0.0867 - acc: 0.9737 - val_loss: 0.1895 - val_acc: 0.9048
Out[10]:
<tensorflow.python.keras.callbacks.History at 0x1ca70a75250>
In [11]:
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
flatten (Flatten)            (None, 4)                 0         
_________________________________________________________________
dense (Dense)                (None, 64)                320       
_________________________________________________________________
dense_1 (Dense)              (None, 3)                 195       
=================================================================
Total params: 515
Trainable params: 515
Non-trainable params: 0
_________________________________________________________________
In [12]:
from sklearn.metrics import confusion_matrix

loss, accuracy = model.evaluate(X_test, Y_test)
print('Akurasi Testing ANN:', accuracy)
1/1 [==============================] - 0s 48ms/step - loss: 0.1321 - acc: 1.0000
Akurasi Testing ANN: 1.0